import com.microsoft.azure.synapse.ml.services.anomaly.SimpleFitMultivariateAnomaly
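// Training window, timestamp column, and input feature columns used for model training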
val startTime: String = "2021-01-01T00:00:00Z"
val endTime: String = "2021-01-02T12:00:00Z"
val timestampColumn: String = "timestamp"
val inputColumns: Array[String] = Array("feature0", "feature1", "feature2")
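// Blob storage folder where intermediate data is staged during training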
val intermediateSaveDir: String = "wasbs://madtest@anomalydetectiontest.blob.core.windows.net/intermediateData"
// Read the Anomaly Detector key from the environment (set ANOMALY_API_KEY before running)
val anomalyKey: String = sys.env.getOrElse("ANOMALY_API_KEY", "")
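// Configure the estimator that trains a multivariate anomaly detection model via the Anomaly Detector service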
val simpleFitMultivariateAnomaly = (new SimpleFitMultivariateAnomaly()
.setSubscriptionKey(anomalyKey)
.setLocation("westus2")
.setOutputCol("result")
.setStartTime(startTime)
.setEndTime(endTime)
.setIntermediateSaveDir(intermediateSaveDir)
.setTimestampCol(timestampColumn)
.setInputCols(inputColumns)
.setSlidingWindow(50))
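// Load the sample multivariate time-series data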
val df = (spark.read.format("csv")
.option("header", True)
.load("wasbs://datasets@mmlspark.blob.core.windows.net/MAD/mad_example.csv"))
val model = simpleFitMultivariateAnomaly.fit(df)
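// Run inference over the same time window and write anomaly results to the output column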
val result = (model
.setStartTime(startTime)
.setEndTime(endTime)
.setOutputCol("result")
.setTimestampCol(timestampColumn)
.setInputCols(inputColumns)
.transform(df))
result.show()
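// Clean up the intermediate data written to intermediateSaveDir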
simpleFitMultivariateAnomaly.cleanUpIntermediateData()
model.cleanUpIntermediateData()